% scribe: Peng Li
% lastupdate: Oct. 16, 2005
% lecture: 9
% title: Basic ${\cal L}^2$ Convergence Theorem
% references: Durrett, section 1.8
% keywords: Basic L2 Convergence Theorem, Kolmogorov's Three-Series Theorem, Kolmogorov's 0-1 Law, tail $\sigma$-field, Kronecker's lemma
% end

\documentclass[12pt,letterpaper]{article}
\include{macros}
\begin{document}
\lecture{9}{Basic ${\cal L}^2$ Convergence Theorem}{Peng Li}{pengli@berkeley.edu}

(This note is a revision of the work of Vinod Prabhakaran from 2002.)

\section{Basic ${\cal L}^2$ Convergence Theorem}
% keywords: Basic L2 Convergence Theorem
% end

\begin{theorem}[Basic ${\cal L}^2$ Convergence Theorem]
Let $X_1, X_2, \ldots$ be independent random variables with $\E(X_i)=0$ and $\E(X_i^2)=\sigma_i^2<\infty$, $i=1,2,\ldots$, and let $S_n=X_1+X_2+\cdots+X_n$. If $\sum_{i=1}^{\infty}\sigma_i^2<\infty$, then $S_n$ converges a.s.\ and in ${\cal L}^2$ to some $S_\infty$ with $\E(S_\infty^2)=\sum_{i=1}^{\infty} \sigma_i^2$.
\end{theorem}

{\em Recall}: We have proved the ${\cal L}^2$ conclusion before, under the weaker assumption that $\E(X_i X_j)=0$ for $i\neq j$. The only new conclusion is a.s.\ convergence in the independent case. In fact, the proof just uses Kolmogorov's inequality from the last lecture, so the conclusion is also valid for a martingale $\{S_n\}$, that is, whenever $\E[X_{n+1} f(X_1, \ldots, X_n)]=0$ for all bounded measurable $f: \R^n \rightarrow \R$.

\begin{proof}
First note that ${\cal L}^2$ convergence and the existence of $S_\infty$ are implied by the orthogonality of the $X_i$'s: since $\E(X_iX_j)=0$ for $i\neq j$,
\begin{eqnarray*}
\E(S_n^2)&=&\sum_{i=1}^{n} \sigma_i^2\mbox{, and}\\
\E((S_n-S_m)^2)&=&\sum_{i=m+1}^{n}\sigma_i^2\;\rightarrow\; 0 \mbox{ as } m,n\;\rightarrow\;\infty\mbox{,}
\end{eqnarray*}
so $S_n$ is Cauchy in ${\cal L}^2$. Since ${\cal L}^2$ is complete, there is a unique $S_\infty$ (up to a.s.\ equivalence) such that $S_n\;\rightarrow\;S_\infty$ in ${\cal L}^2$.

Turning to a.s.\ convergence, the method is to show that the sequence $(S_n)$ is a.s.\ Cauchy. The limit of $S_n$ then exists a.s.\ by completeness of the real numbers. The same argument applies more generally to martingale differences $X_i$. Note that this method gives $S_\infty$ more explicitly, and does not appeal to completeness of ${\cal L}^2$.

Recall that $(S_n)$ is Cauchy a.s.\ means $M_n:=\sup_{p,q\geq n} |S_p-S_q|\;\rightarrow\; 0$ a.s. Since $0\leq M_n(\omega)\downarrow$, the sequence $M_n(\omega)$ converges to a limit in $[0,\infty]$. So, if $\P(M_n>\epsilon)\;\rightarrow\; 0$ for all $\epsilon>0$, then $M_n\downarrow 0$ a.s.

Let $M_n^\ast:=\sup_{p\geq n}|S_p-S_n|$. By the triangle inequality,
\[
|S_p-S_q|\leq|S_p-S_n|+|S_q-S_n|\; \Rightarrow\; M_n^\ast\leq M_n\leq 2M_n^\ast,
\]
so it is sufficient to show that $M_n^\ast \; \stackrel{P}{\rightarrow}\; 0$. For all $\epsilon>0$,
\begin{align*}
\P\left(\sup_{p\geq n}|S_p-S_n|>\epsilon\right)& = \lim_{N\rightarrow\infty}\P\left(\max_{n\leq p\leq N}|S_p-S_n|>\epsilon\right)\\
& \leq \lim_{N\rightarrow\infty} \sum_{i=n+1}^{N} \frac{\sigma_i^2}{\epsilon^2} = \sum_{i=n+1}^{\infty} \frac{\sigma_i^2}{\epsilon^2},
\end{align*}
where we applied Kolmogorov's inequality in the second step. Since $\sum_{i=1}^{\infty} \sigma_i^2 < \infty$, the tail sum on the right tends to $0$, so
\[
\lim_{n\rightarrow\infty} \P\left(\sup_{p\geq n}|S_p-S_n|>\epsilon\right)=0,
\]
that is, $M_n^\ast \stackrel{P}{\rightarrow} 0$, which completes the proof.
\end{proof}

{\em Remark}: Orthogonality of the $X_i$'s alone, without independence, is not enough to guarantee an a.s.\ limit, though counterexamples are hard to construct. According to classical results of Rademacher--Menchoff, for orthogonal $X_i$ the condition $\sum_i (\log^2 i)\, \sigma_i^2 < \infty$ is enough for a.s.\ convergence of $S_n$, whereas if $b_i \uparrow$ with $b_i = o(\log^2 i)$ there exist orthogonal $X_i$ such that $\sum_i b_i \sigma_i^2 < \infty$ and $S_n$ diverges almost surely.
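
A standard illustration of the theorem is a series with random signs.

\begin{example}
Let $\epsilon_1, \epsilon_2, \ldots$ be i.i.d.\ with $\P(\epsilon_i=1)=\P(\epsilon_i=-1)=\frac{1}{2}$, and let $X_i=\epsilon_i a_i$ for real constants $a_i$. Then $\E(X_i)=0$ and $\sigma_i^2=a_i^2$, so if $\sum_{i=1}^\infty a_i^2<\infty$, the theorem shows that $\sum_{i=1}^\infty \epsilon_i a_i$ converges a.s. In particular, the random harmonic series $\sum_{i=1}^\infty \epsilon_i/i$ converges a.s., even though $\sum_{i=1}^\infty 1/i=\infty$.
\end{example}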
\section{Kolmogorov's Three-Series Theorem}
% keywords: Kolmogorov's Three-Series Theorem
% end

An easy consequence of the Basic ${\cal L}^2$ Convergence Theorem is the sufficiency part of Kolmogorov's three-series theorem:

\begin{theorem}[Kolmogorov]
Let $X_1,X_2,\ldots$ be independent. Fix $b>0$. Convergence of the following three series
\begin{itemize}
\item $\sum_n \P(|X_n|>b)$
\item $\sum_n \E(X_n\1_{(|X_n|\leq b)})$
\item $\sum_n \mbox{var}(X_n\1_{(|X_n|\leq b)})$
\end{itemize}
implies that $\sum_n X_n$ converges a.s.
\end{theorem}

\begin{proof}
Let $X'_n:=X_n\1_{(|X_n|\leq b)}$. Since $\sum_n \P(X'_n\neq X_n)=\sum_n \P(|X_n|>b)<\infty$, the Borel-Cantelli lemma gives $\P(X'_n\neq X_n \mbox{ i.o.})=0$, which implies $\P(X'_n=X_n \mbox{ ev.})=1$. Also, if $X'_n(\omega)=X_n(\omega)$ ev., then $\sum_nX_n(\omega)$ converges $\Leftrightarrow$ $\sum_nX'_n(\omega)$ converges. Therefore it is enough to show that
\[\P\left(\sum_n X'_n \mbox{ converges to a finite limit}\right)=1.\]
Now
\[\sum_{n=1}^{N} X'_n = \sum_{n=1}^N (X'_n-\E(X'_n))\;+\;\sum_{n=1}^N \E(X'_n).\]
The second sum $\sum_{n=1}^N \E(X'_n)$ has a limit as $N\rightarrow\infty$ by hypothesis, and
\[\sum_{n=1}^\infty \E((X'_n-\E(X'_n))^2)=\sum_{n=1}^\infty \mbox{var}(X'_n)<\infty\]
implies that $\sum_{n=1}^\infty(X'_n-\E(X'_n))$ converges a.s.\ by the Basic ${\cal L}^2$ Convergence Theorem.
\end{proof}

\section{Kolmogorov's 0-1 Law}
% keywords: Kolmogorov's 0-1 Law, tail $\sigma$-field, Kronecker's lemma.
% end

Let $X_1, X_2, \ldots$ be independent random variables (not necessarily real valued). Let ${\cal F}_n'=\sigma(X_n, X_{n+1}, \ldots)=$ the future after time $n=$ the smallest $\sigma$-field with respect to which all the $X_m$, $m \geq n$, are measurable. Let ${\cal T}=\cap_n {\cal F}_n'=$ the remote future, or {\it tail} $\sigma$-field.

\begin{example}
$\{\omega : S_n(\omega) \mbox{ converges} \} \in {\cal T}$.
\end{example}

\begin{theorem}[Kolmogorov's 0-1 Law]
If $X_1, X_2, \ldots$ are independent and $A \in {\cal T}$, then $\P(A)=0$ or $1$.
\end{theorem}

\begin{proof}
The idea is to show that $A$ is independent of itself, that is, $\P(A\cap A)=\P(A)\P(A)$, so $\P(A)=\P(A)^2$, and hence $\P(A)=0$ or 1. We will prove this in two steps:

(a) $A \in \sigma (X_1, \ldots, X_k)$ and $B \in \sigma(X_{k+1}, X_{k+2}, \ldots)$ are independent.

Proof of (a): If $B \in \sigma(X_{k+1}, \ldots, X_{k+j})$ for some $j$, this follows from (4.5) in chapter 1 of \cite{durrett}. Since $\sigma(X_1, \ldots, X_k)$ and $\cup_j \sigma(X_{k+1}, \ldots, X_{k+j})$ are $\pi$-systems that contain $\Omega$, (a) follows from (4.5) in chapter 1 of \cite{durrett}.

(b) $A \in \sigma (X_1, X_2, \ldots)$ and $B \in {\cal T}$ are independent.

Proof of (b): Since ${\cal T} \subset \sigma(X_{k+1}, X_{k+2}, \ldots)$, if $A \in \sigma (X_1, \ldots, X_k)$ for some $k$, this follows from (a). $\cup_k \sigma(X_1, \ldots, X_k)$ and ${\cal T}$ are $\pi$-systems that contain $\Omega$, so (b) follows from (4.2) in chapter 1 of \cite{durrett}.

Since ${\cal T} \subset \sigma(X_{1}, X_{2}, \ldots)$, (b) implies that $A \in {\cal T}$ is independent of itself, and the theorem follows.
\end{proof}
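
Combining the example above with the 0-1 law gives a useful dichotomy for random series.

\begin{example}
For independent $X_1, X_2, \ldots$ the event $\{\omega : \sum_n X_n(\omega) \mbox{ converges}\}$ is a tail event, so $\sum_n X_n$ either converges a.s.\ or diverges a.s.; convergence with probability strictly between 0 and 1 is impossible. The three-series theorem (whose conditions are in fact also necessary, although we proved only sufficiency here) determines which alternative holds.
\end{example}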
Recall {\em Kronecker's lemma}: if $a_n\uparrow\infty$ and $\sum_{n=1}^{\infty} {X_n}/{a_n}$ converges a.s., then $(\sum_{m=1}^n X_m)/{a_n} \ascv 0$.

Let $X_1,X_2,\ldots$ be independent with mean 0 and $S_n=X_1+X_2+\cdots+X_n$. If $\sum_{n=1}^{\infty} {\E(X_n^2)}/{a_n^2}<\infty$, then $\sum_{n=1}^{\infty} {X_n}/{a_n}$ converges a.s.\ by the Basic ${\cal L}^2$ Convergence Theorem, and hence ${S_n}/{a_n}\rightarrow 0$ a.s.\ by Kronecker's lemma.

\begin{example}
Let $X_1,X_2,\ldots$ be i.i.d.\ with $\E(X_i)=0$ and $\E(X_i^2)=\sigma^2<\infty$.

Take $a_n=n$:
\[\sum_{n=1}^{\infty}\frac{\sigma^2}{n^2}<\infty\;\Rightarrow\;\frac{S_n}{n}\stackrel{a.s.}{\rightarrow}0,\]
which is the strong law of large numbers in the finite-variance case.

Now take $a_n=n^{\frac{1}{2}+\epsilon}$, $\epsilon>0$:
\[\sum_{n=1}^{\infty}\frac{\sigma^2}{n^{1+2\epsilon}}<\infty\;\Rightarrow\;\frac{S_n}{n^{\frac{1}{2}+\epsilon}}\stackrel{a.s.}{\rightarrow}0.\]
\end{example}

\bibliographystyle{plain}
\bibliography{../books}

\end{document}